In [1]:
from neatplots.predefined import four as palette
from matplotlib.patches import Rectangle
from matplotlib.gridspec import GridSpec, GridSpecFromSubplotSpec

from plume.prediction import OnlineGP, RBFKernel, ExponentialKernel, Matern32Kernel, Matern52Kernel

%pylab inline
%load_ext autoreload
%autoreload 2

gray = (0.3, 0.3, 0.3)
seed(64321)

import latexstyle
latexstyle.setup()


Populating the interactive namespace from numpy and matplotlib

In [2]:
x_range = (0, 7)
y_range = (-1.5, 2)
fn = lambda x: np.sin(x)

xs = np.linspace(x_range[0], x_range[1], 100)
ys = fn(xs)

noise_std = 0
train_x = x_range[0] + (x_range[1] - x_range[0]) * rand(5)
train_y = fn(train_x) + noise_std * randn(len(train_x))

In [3]:
kernel = RBFKernel(1)
gp = OnlineGP(kernel, noise_std ** 2)
gp.fit(np.atleast_2d(train_x).T, np.atleast_2d(train_y).T)
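
For reference, if OnlineGP performs exact GP regression (an assumption about the plume library, not verified here), the mean and MSE returned by gp.predict in the next cell are the standard predictive equations

$$\mu(x_*) = k_*^\top (K + \sigma_n^2 I)^{-1} \mathbf{y}, \qquad \sigma^2(x_*) = k(x_*, x_*) - k_*^\top (K + \sigma_n^2 I)^{-1} k_*,$$

where $K$ is the kernel matrix over the training inputs, $k_*$ the vector of kernel values between $x_*$ and the training inputs, and $\sigma_n^2$ the noise variance passed to the constructor (zero above).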

In [4]:
mean, mse = gp.predict(np.atleast_2d(xs).T, eval_MSE=True)

fig = plt.figure(figsize=(2.36, 1.57))
ax = fig.add_subplot(1, 1, 1)
ax.fill_between(
    xs, np.squeeze(mean) - np.sqrt(mse), np.squeeze(mean) + np.sqrt(mse),
    color=tuple(palette.thin[1]) + (0.2,), edgecolor='none')
ax.plot(xs, mean, color=palette.thin[1], label=r'$\mu(x)$')
ax.scatter(train_x, train_y, marker='+', s=24, color=palette.thin[1], label=r'$X$')

latexstyle.style_axes(ax)
ax.set_xlim(*x_range)
ax.set_ylim(*y_range)
ax.set_xlabel(r'$x$', labelpad=0)
ax.set_ylabel(r'$y$', labelpad=0, rotation='horizontal', verticalalignment='center')

handles, labels = ax.get_legend_handles_labels()
# Move Observations legend
handles.insert(0, handles.pop(1))
labels.insert(0, labels.pop(1))
# Insert Predictive SD legend
handles.insert(2, Rectangle((0, 0), 1, 1, fc=tuple(palette.thin[1]) + (0.2,), ec='none'))
labels.insert(2, r'$\pm \sigma(x)$')

ax.legend(handles, labels, ncol=3, loc='upper center', bbox_to_anchor=(0.5, 1.05), frameon=False, columnspacing=1.5, handletextpad=0.2)


Out[4]:
<matplotlib.legend.Legend at 0x1164f08d0>

In [5]:
fig.savefig('../../thesis/plots/gp.pdf')

In [6]:
mean, cv = gp.predict(np.atleast_2d(xs).T, eval_MSE=True, complete_cov=True)

fig = plt.figure(figsize=(2.36, 1.57))
ax = fig.add_subplot(1, 1, 1)
for i in xrange(3):
    ax.plot(xs, multivariate_normal(np.squeeze(mean), cv), color=palette.thin[(i + 2) % 4])
ax.scatter(train_x, train_y, marker='+', s=24, color=palette.thin[1], label=r'$X$')

latexstyle.style_axes(ax)
ax.set_xlim(*x_range)
ax.set_ylim(*y_range)
ax.set_xlabel(r'$x$', labelpad=0)
#ax.set_ylabel(r'$y$', labelpad=0, rotation='horizontal', verticalalignment='center')
plt.setp(ax.get_yticklabels(), visible=False)


Out[6]:
[None, None, None, None, None, None, None, None]
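
The three posterior draws above call multivariate_normal on the full predictive covariance; with a noise-free fit that matrix is nearly singular, so the call can be numerically fragile. A minimal alternative sketch (not part of the original notebook, assuming mean and cv are the values returned by gp.predict above) adds a small jitter and samples through a Cholesky factor:

# Draw 3 posterior samples via a Cholesky factor of the predictive covariance.
jitter = 1e-10 * np.eye(len(xs))  # guards against a numerically singular covariance
L = np.linalg.cholesky(cv + jitter)
samples = np.squeeze(mean)[:, None] + np.dot(L, np.random.randn(len(xs), 3))
# Each column of `samples` is one function drawn from the posterior.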

In [7]:
fig.savefig('../../thesis/plots/gp_sample.pdf')

Lengthscale visualisation
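
The two panels below fit the same Gaussian-bump data with a Matérn-3/2 kernel at lengthscales $\ell = 1$ and $\ell = 10$; judging from the panel titles, the constructor argument of Matern32Kernel is this lengthscale (an assumption, not checked against the plume source). In its standard form the kernel is

$$k_{3/2}(r) = \left(1 + \frac{\sqrt{3}\,r}{\ell}\right) \exp\!\left(-\frac{\sqrt{3}\,r}{\ell}\right),$$

so the covariance depends only on $r/\ell$: a larger $\ell$ keeps distant points correlated and yields a smoother, flatter fit.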


In [8]:
gauss = lambda x: 1.0 / np.sqrt(2.0 * np.pi) * np.exp(-(x ** 2) / 2.0)

x_range = (-14.5, 7)
y_range = (-1.25, 1.25)

xs = np.linspace(x_range[0], x_range[1], 100)
ys = gauss(xs)

train_x = np.array([[-7.5, -6, -4.5, -3, -1.5, 0]]).T
train_y = gauss(train_x)

In [9]:
fig = plt.figure(figsize=(5.35, 1.77))

legend_height = 0.05
legend_grid = GridSpec(2, 1, height_ratios=(1.0 - legend_height, legend_height), hspace=0.35)
grid = GridSpecFromSubplotSpec(1, 2, subplot_spec=legend_grid[0], wspace=0.1)

gp = OnlineGP(Matern32Kernel(1), 0)
gp.fit(train_x, train_y)
mean, mse = gp.predict(np.atleast_2d(xs).T, eval_MSE=True)

ax = fig.add_subplot(grid[0])
line, = ax.plot(xs, ys, '--', c=gray, label='Target function')
line.set_dashes((3, 3))
ax.fill_between(
    xs, np.squeeze(mean) - np.sqrt(mse), np.squeeze(mean) + np.sqrt(mse),
    color=tuple(palette.thin[1]) + (0.2,), edgecolor='none')
ax.plot(xs, mean, color=palette.thin[1], label=r'$\mu(x)$')
ax.scatter(train_x, train_y, marker='+', s=24, color=palette.thin[1], label=r'$X$')

latexstyle.style_axes(ax)
ax.set_xlim(*x_range)
ax.set_ylim(*y_range)
ax.set_xlabel(r'$x$', labelpad=0)
ax.set_ylabel(r'$y$', labelpad=0, rotation='horizontal', verticalalignment='center')
ax.set_title(r'$\ell = 1$')

gp = OnlineGP(Matern32Kernel(10), 0)
gp.fit(train_x, train_y)
mean, mse = gp.predict(np.atleast_2d(xs).T, eval_MSE=True)

ax = fig.add_subplot(grid[1])
line, = ax.plot(xs, ys, '--', c=gray, label='Target function')
line.set_dashes((3, 3))
ax.fill_between(
    xs, np.squeeze(mean) - np.sqrt(mse), np.squeeze(mean) + np.sqrt(mse),
    color=tuple(palette.thin[1]) + (0.2,), edgecolor='none')
ax.plot(xs, mean, color=palette.thin[1], label=r'$\mu(x)$')
ax.scatter(train_x, train_y, marker='+', s=24, color=palette.thin[1], label=r'$X$')

latexstyle.style_axes(ax)
ax.set_xlim(*x_range)
ax.set_ylim(*y_range)
ax.set_xlabel(r'$x$', labelpad=0)
plt.setp(ax.get_yticklabels(), visible=False)
ax.set_title(r'$\ell = 10$')

handles, labels = ax.get_legend_handles_labels()
# Move Observations legend
handles.insert(1, handles.pop(2))
labels.insert(1, labels.pop(2))
# Insert Predictive SD legend
handles.insert(3, Rectangle((0, 0), 1, 1, fc=tuple(palette.thin[1]) + (0.2,), ec='none'))
labels.insert(3, r'$\pm \sigma(x)$')

ax = fig.add_subplot(legend_grid[1])
ax.set_axis_off()
ax.legend(handles, labels, ncol=4, loc='upper center', bbox_to_anchor=(0.5, 1.0), frameon=False, columnspacing=1.5, handletextpad=0.2)


Out[9]:
<matplotlib.legend.Legend at 0x116769450>

In [10]:
fig.savefig('../../thesis/plots/gp-lengthscale.pdf')

Kernels
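
The four stationary kernels plotted below, written as functions of the distance $r$ with lengthscale $\ell$ (standard textbook forms, assumed to match the plume implementations up to scaling; $k_{3/2}$ as given above):

$$k_{\mathrm{SE}}(r) = \exp\!\left(-\frac{r^2}{2\ell^2}\right), \qquad k_{5/2}(r) = \left(1 + \frac{\sqrt{5}\,r}{\ell} + \frac{5 r^2}{3\ell^2}\right) \exp\!\left(-\frac{\sqrt{5}\,r}{\ell}\right), \qquad k_{\mathrm{exp}}(r) = \exp\!\left(-\frac{r}{\ell}\right).$$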


In [11]:
x_range = (-4, 4)
xs = np.linspace(x_range[0], x_range[1], 250)

fig = plt.figure(figsize=(3, 1.5))
ax = fig.add_subplot(1, 1, 1)

kernels = [
    (r'$k_{\mathrm{SE}}(r)$', RBFKernel(1)),
    (r'$k_{5/2}(r)$', Matern52Kernel(1)),
    (r'$k_{3/2}(r)$', Matern32Kernel(1)),
    (r'$k_{\mathrm{exp}}(r)$', ExponentialKernel(1))]
styles = [tuple(), (3, 2), (3, 1, 0.4, 1), (0.4, 1)]

for (name, k), c, s in zip(kernels, palette.thin, styles):
    line, = ax.plot(xs, np.squeeze(k(np.atleast_2d(xs).T, np.array([[0]]))), c=c, label=name)
    line.set_dashes(s)
    
latexstyle.style_axes(ax)
ax.set_xlim(*x_range)
ax.set_ylim(0, 1)
ax.set_xlabel(r'$r$', labelpad=0)
ax.set_ylabel(r'$k(r)$', labelpad=9, rotation='horizontal', verticalalignment='center')
ax.legend(frameon=False, columnspacing=1.5, handletextpad=0.2)


Out[11]:
<matplotlib.legend.Legend at 0x116393750>

In [12]:
fig.savefig('../../thesis/plots/kernels.pdf')

In [13]:
fig = plt.figure(figsize=(3, 1.5))
ax = fig.add_subplot(1, 1, 1)

xs2 = np.atleast_2d(xs).T
for (name, k), c, s in zip(kernels, palette.thin, styles):
    ys = multivariate_normal(np.zeros_like(xs), k(xs2, xs2))
    line, = ax.plot(xs, ys, c=c, label=name)
    line.set_dashes(s)
    
latexstyle.style_axes(ax)
ax.set_xlim(*x_range)
ax.set_xlabel(r'$x$', labelpad=0)
ax.set_ylabel(r'$y$', labelpad=0, rotation='horizontal', verticalalignment='center')


Out[13]:
<matplotlib.text.Text at 0x116387f90>

In [14]:
fig.savefig('../../thesis/plots/kernsamples.pdf')
